Toxicity with TensorFlow.js

Directly from the Coursera DeepLearning.ai course: a minimal HTML page that loads the pre-trained toxicity classifier with TensorFlow.js in the browser and scores one sample sentence against its seven toxicity labels.

<html>
<head>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@latest"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/toxicity"></script>
<script>
// Minimum confidence: a label only counts as a match when its
// probability exceeds this threshold.
const threshold = 0.9;
toxicity.load(threshold).then(model => {
    const sentences = ['you suck'];
    model.classify(sentences).then(predictions => {
        console.log(predictions);
        // One prediction per label; results[0].match is true/false when one
        // class clears the threshold, and null when neither class does.
        for (let i = 0; i < predictions.length; i++) {
            if (predictions[i].results[0].match) {
                console.log(predictions[i].label +
                            " was found with probability of " +
                            predictions[i].results[0].probabilities[1]);
            }
        }
    });
});
</script>
</head>
<body></body>
</html>
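
The same classification can also be written with async/await. This is only a sketch: it assumes the two CDN scripts above are already loaded, and that toxicity.load() accepts an optional second argument listing which label names to score (as described in the tensorflow/tfjs-models toxicity README); omit that argument to score all seven labels.

<script>
  // Sketch: async/await variant of the snippet above.
  async function runToxicity() {
    const threshold = 0.9;
    // Optional second argument (assumed from the tfjs-models docs): restrict
    // scoring to a subset of labels, here "insult" and "toxicity".
    const model = await toxicity.load(threshold, ['insult', 'toxicity']);
    const predictions = await model.classify(['you suck']);
    predictions
      .filter(p => p.results[0].match)  // keep only labels that cleared the threshold
      .forEach(p => console.log(p.label +
                                " was found with probability of " +
                                p.results[0].probabilities[1]));
  }
  runToxicity();
</script>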

Output

[Log] Array (7) (C1_W3_Lab_1_toxicity_classifier.html, line 10)

[
  { label: "identity_attack", results: [{ match: false, probabilities: Float32Array [0.9659663438796997, 0.03403368219733238] }] },
  { label: "insult",          results: [{ match: true,  probabilities: Float32Array [0.0812472328543663, 0.9187527894973755] }] },
  { label: "obscene",         results: [{ match: null,  probabilities: Float32Array [0.39931508898735046, 0.6006849408149719] }] },
  { label: "severe_toxicity", results: [{ match: false, probabilities: Float32Array [0.9970395565032959, 0.0029604369774460793] }] },
  { label: "sexual_explicit", results: [{ match: null,  probabilities: Float32Array [0.7053247094154358, 0.2946752905845642] }] },
  { label: "threat",          results: [{ match: false, probabilities: Float32Array [0.9106737971305847, 0.0893261581659317] }] },
  { label: "toxicity",        results: [{ match: true,  probabilities: Float32Array [0.03117678500711918, 0.9688231945037842] }] }
]
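
The model is not limited to the browser. Below is a rough Node.js sketch, assuming the @tensorflow-models/toxicity and @tensorflow/tfjs-node npm packages are installed; the setup shown here is an assumption and may need adjusting to your environment.

require('@tensorflow/tfjs-node');                        // assumed: registers the native Node backend
const toxicity = require('@tensorflow-models/toxicity'); // assumed npm package name

toxicity.load(0.9).then(model =>
  model.classify(['you suck']).then(predictions =>
    predictions
      .filter(p => p.results[0].match)  // keep only labels that cleared the threshold
      .forEach(p => console.log(p.label, p.results[0].probabilities[1]))));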